/* Hooke and Jeeves direct-search least-squares training routines for the neural network. */
- #include "Neural Network.h"
- #include <math.h>
-
- extern FILE * Jac;
-
- extern NeuralNet * theNet;
- extern DTypeVector yData;
- extern DTypeMatrix XData;
- extern DTypeVector Alpha[];
- extern DTypeVector Pi;
- extern DTypeVector Diag;
- extern DTypeVector gradSS;
-
- static unsigned int totparms; /* total parms in model, set by AllotSearchWorkSpace() */
- static int itncount = 0;
- static double baseSS;
- static double patSS;
- static double RelStep;
- static DTypeVector * base;
- static DTypeVector * base_;
-
- /*----------------------
- Hooke and Jeeve's method for direct search to find least squares, see
- "Methods of Optimization" by G.R. Walsh.
- Expects:
- 1. tolerance value steptol; stored in NeuralNet structure.
- 2. validly defined and alloted NeuralNet structure.
- 3. input data in matrix XData and output data values in vector yData.
- 4. total number of parameters for model in "totparms".
- Returns termcode = FAIL if current parm value is not an approximate critical point
- = METSTEPTOL if scaled step is less than steptol
- = EXCEEDITNLIM if iteratation limit exceeded
- */
- do_HookeJeeves()
- {
- int termcode = 0;
- int success;
-
- base = Π
- base_ = &gradSS;
- RelStep = 1.0;
- SaveParms(&gradSS); /* save current weight values in vector Pi */
-
- printf("Starting Hooke and Jeeves method\n");
- while(termcode == 0)
- { patSS = Compute_SS();
- success = ExMove();
- switch(success)
- { case TRUE: /* accept new base point, stored in W[] of net */
- {
- PatMove(); /* exits on failure to find new base point */
- break;
- }
- case FALSE: /* ExMove failed, so reduce step size and continue */
- {
- RelStep = .5*RelStep;
- if (RelStep < theNet->steptol)
- termcode = METSTEPTOL;
- else if (itncount > theNet->itnlimit)
- termcode = EXCEDITNLIM;
- break;
- }
- } /* end of "switch(success)" */
- } /* end of "while(termcode == 0)" */
-
- return(termcode);
- }
-
- /*-----------------------
- Pattern move where previous successful steps given in Diag and current weights given
- in Pi.
- */
- PatMove()
- {
- int j,k,N;
- int flag = 0;
- double ss;
- DTypeVector * temp;
- DataType * w; /* pointer to weight matrix */
- DataType * b; /* pointer to previous base point, use Pi vector */
- DataType * b_; /* pointer to current base point, use gradSS vector */
-
- SaveParms(base); /* save weights as base in case need to abandon new pattern */
- baseSS = patSS; /* save SS for comparison to patSS after pattern search */
-
- do
- {
- /*---- install new pattern step and update patSS ----*/
- b_ = *base_->cells;
- for(j=0; j<theNet->OutLayer; j++)
- { N = (theNet->W[j].rows)*(theNet->W[j].cols);
- w = *theNet->W[j].cells;
- for(k=0; k<N; k++, w++, b_++)
- *w = 2*(*w) - *b_; /* weights are still at base values here */
- }
- patSS = Compute_SS(); /* get new reference SS for exploratory move */
-
- /*---- try an exploratory step from new pattern ----*/
- ExMove(); /* don't need success flag from ExMove()
- leaves best SS value in patSS*/
-
- /*---- check if explore step found better point ----*/
- /*---- regardless of succ of failure of pattern step, set "base_" to "base"
- and prepare to find a new base, either by another pattern move if previous
- pattern move was successful, or another exploratory move if previous pattern
- move unsuccessful*/
- if(patSS<baseSS) /* T => successful pattern step so prepare to make one more */
- { temp = base_;
- base_ = base; /* this accepts new base, makes it the old base */
- base = temp;
- SaveParms(base);/* save weights as new base */
- baseSS = patSS; /* reset the base SS value */
- }
- else /* F => unsuccessful pattern step so prepare for next explore move */
- { RestoreParms(base); /* abandon new weights, go back to base */
- temp = base_;
- base_ = base; /* this accepts new base, makes it the old base */
- base = temp;
- flag = 1; /* set flag to exit pattern move */
- }
-
- } while(flag < 1);
- printf("patSS = %lf \n",patSS);
- }
-
- /*----------------------
- Do the exploratory moves.
- Resets patSS to minimum SS value found in exploratory search.
- Returns success = TRUE if found SS reducing change
- = FALSE if could not find SS reducing change
- */
- ExMove()
- {
- int i,j,N;
- int success;
- double ss; /* value of SS for an exploratory move */
- double ss_; /* minimum value of SS found in this execution of ExMove() */
- DataType step;
- DataType * w;
-
- itncount +=1;
-
- ss_ = patSS;
- for(j=0; j<theNet->OutLayer; j++)
- { N = (theNet->W[j].rows)*(theNet->W[j].cols);
- w = *theNet->W[j].cells;
- for(i=0; i<N; i++, w++)
- { step = RelStep*(fabs(*w) + theNet->maxstep);
- *w = *w + step;
- ss = Compute_SS();
- if(ss<ss_)
- ss_ = ss; /* save new SS and go to next explore step */
- else
- { *w = *w - 2.0*step;
- ss = Compute_SS();
- if(ss<ss_)
- ss_ = ss; /* save new SS and go to next explore step */
- else
- { *w = *w + step; /* restore old parm value since no SS reduction */
- }
- }
- } /* end of for(i=0; k<N; i++, w++) */
- } /* end of for(j=0; j<theNet->OutLayer; j++) */
- success = ( ss_ < patSS ) ? TRUE : FALSE;
- patSS = ss_;
- return(success);
- }
-
- /*----------------------
- Allot memory for Alpha, Pi, and Diag data structures.
- Requires # observations from the data structure.
- Since is always run before execution of method, sets totparms variable.
- */
- AllotSearchWorkSpace()
- {
- int i;
- int mxprms = 1; /* keep track of layer with largest number of parameters */
-
- totparms = 0;
-
- for(i=0; i<theNet->OutLayer; i++)
- { totparms += (theNet->W[i].rows)*(theNet->W[i].cols);
- AllotDTypeVector(&Alpha[i], theNet->Units[i]);
- /* values for Unit[i] in each layer must be set prior
- to execution of this step. Also note that
- the Alpha[0] vector is not used, instead a pseudo vector is
- created from a row of the XData matrix. Thus, the allocation
- for i=0 wastes (Alpha[0].row)*sizeof(DataType) bytes.
- */
- if(Alpha[i].rows > mxprms) mxprms = Alpha[i].rows;
- }
- AllotDTypeVector(&(Alpha[theNet->OutLayer]), 1);
- /* this allots space for output layer, a scaler but aesthetically pleasing */
-
- AllotDTypeVector(&Pi,totparms);
- AllotDTypeVector(&Diag,totparms);
- AllotDTypeVector(&gradSS,totparms);
- }
-
/*----------------------
Lock the search work-space handles so their blocks cannot be relocated by the
classic Mac OS Memory Manager while the search dereferences raw pointers into
them (e.g. via *vector.cells in ExMove/PatMove).
*/
LockSearchWorkSpace()
{
	HLock(Pi.cells);	/* previous base point vector */
	HLock(Diag.cells);	/* step / diagonal work vector */
	HLock(gradSS.cells);	/* current base point vector */
	HLockNet();	/* presumably locks the net's weight handles — defined elsewhere */
}
-
/*----------------------
Unlock the search work-space handles (counterpart of LockSearchWorkSpace),
allowing the Memory Manager to relocate their blocks again.
*/
UnlockSearchWorkSpace()
{
	HUnlock(Pi.cells);
	HUnlock(Diag.cells);
	HUnlock(gradSS.cells);
	HUnlockNet();	/* presumably unlocks the net's weight handles — defined elsewhere */
}
-